All files / controllers AIController.js

0% Statements 0/53
0% Branches 0/28
0% Functions 0/3
0% Lines 0/41

Press n or j to go to the next uncovered block, b, p or k for the previous block.

1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88                                                                                                                                                                               
/**
 * AI Controller - Handles AI configuration (OpenAI)
 * @module controllers/AIController
 */
 
const BaseController = require('./BaseController');
const { pool } = require('../config/database');
const logger = require('../config/logger');
 
class AIController extends BaseController {
  /**
   * Return the calling tenant's AI configuration row, or null if none exists.
   * Route handler: responds with { success, data } JSON.
   * @param {object} req - Express request; req.user.tenantId identifies the tenant.
   * @param {object} res - Express response.
   */
  static async getConfig(req, res) {
    const connection = await pool.getConnection();
    try {
      const tenantId = req.user.tenantId;

      // SECURITY NOTE(review): SELECT * returns the stored openai_api_key to the
      // client verbatim. Consider masking the key (or returning only a
      // has_api_key flag) — confirm what the frontend actually needs before
      // changing the response shape.
      const [configs] = await connection.query(
        'SELECT * FROM ai_configurations WHERE tenant_id = ?',
        [tenantId]
      );

      res.json({ success: true, data: configs.length ? configs[0] : null });
    } catch (error) {
      logger.error('Error getting AI config:', error);
      res.status(500).json({ success: false, message: req.t('errors.internal_server_error'), error: error.message });
    } finally {
      // Always return the connection to the pool, even on error.
      connection.release();
    }
  }

  /**
   * Upsert the tenant's AI configuration.
   * Performs a partial UPDATE when a row exists (only fields present in the
   * body are written), otherwise INSERTs a new row with defaults.
   * @param {object} req - Express request; body may contain openai_api_key,
   *   model, temperature, max_tokens, system_prompt, is_active.
   * @param {object} res - Express response.
   */
  static async updateConfig(req, res) {
    const connection = await pool.getConnection();
    try {
      const tenantId = req.user.tenantId;
      const { openai_api_key, model, temperature, max_tokens, system_prompt, is_active } = req.body;

      const [existing] = await connection.query(
        'SELECT id FROM ai_configurations WHERE tenant_id = ?',
        [tenantId]
      );

      if (existing.length) {
        // Build a partial UPDATE from only the fields the caller supplied.
        // Column names are fixed literals; only values go through placeholders.
        const updates = [];
        const params = [];

        if (openai_api_key) { updates.push('openai_api_key = ?'); params.push(openai_api_key); }
        if (model) { updates.push('model = ?'); params.push(model); }
        // Numeric fields use explicit undefined checks so valid 0 values
        // (e.g. temperature 0 for deterministic output) are not dropped.
        if (temperature !== undefined) { updates.push('temperature = ?'); params.push(temperature); }
        if (max_tokens !== undefined) { updates.push('max_tokens = ?'); params.push(max_tokens); }
        if (system_prompt) { updates.push('system_prompt = ?'); params.push(system_prompt); }
        if (is_active !== undefined) { updates.push('is_active = ?'); params.push(is_active); }

        if (updates.length > 0) {
          updates.push('updated_at = NOW()');
          params.push(tenantId);
          await connection.query(`UPDATE ai_configurations SET ${updates.join(', ')} WHERE tenant_id = ?`, params);
        }
      } else {
        // First-time setup: insert with defaults for anything omitted.
        // ?? (not ||) for the numeric fields so an explicit 0 is honored;
        // || null is deliberate for the strings, normalizing '' to NULL.
        await connection.query(`
          INSERT INTO ai_configurations (tenant_id, openai_api_key, model, temperature, max_tokens, system_prompt, is_active, created_at, updated_at)
          VALUES (?, ?, ?, ?, ?, ?, ?, NOW(), NOW())
        `, [tenantId, openai_api_key || null, model || 'gpt-3.5-turbo', temperature ?? 0.7, max_tokens ?? 150, system_prompt || null, is_active !== false]);
      }

      logger.info(`AI config updated for tenant ${tenantId}`);
      res.json({ success: true, message: req.t('ai.config_updated') });
    } catch (error) {
      logger.error('Error updating AI config:', error);
      res.status(500).json({ success: false, message: req.t('errors.internal_server_error'), error: error.message });
    } finally {
      connection.release();
    }
  }

  /**
   * Smoke-test endpoint for the AI integration.
   * Currently a stub: req.body.message is accepted but unused, and a canned
   * response is returned until the real OpenAI call is implemented.
   * @param {object} req - Express request; body.message is the test prompt.
   * @param {object} res - Express response.
   */
  static async testAI(req, res) {
    try {
      const { message } = req.body;
      // TODO: Implement actual OpenAI API test using `message` and the
      // tenant's stored configuration.
      res.json({ success: true, message: req.t('ai.test_successful'), response: 'Test response' });
    } catch (error) {
      logger.error('Error testing AI:', error);
      res.status(500).json({ success: false, message: req.t('errors.internal_server_error'), error: error.message });
    }
  }
}
 
module.exports = AIController;